Show the code
# Predictive Models for BoardGameGeek Ratings

# Display the targets pipeline dependency graph (targets only, not functions)
targets::tar_visnetwork(targets_only = TRUE)

# Versioned pin board holding the trained model objects
model_board <- pins::board_folder(
  "models",
  versioned = TRUE
)

# Load the vetiver model for each BGG outcome from the pin board
averageweight_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_averageweight"
  )

average_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_average"
  )

usersrated_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_usersrated"
  )

# Plot predicted vs observed outcomes on the validation set,
# mapping point alpha to each game's number of user ratings
valid_predictions |>
  pivot_outcomes() |>
  left_join(
    games |>
      bggUtils:::unnest_outcomes() |>
      select(game_id, usersrated),
    by = join_by(game_id)
  ) |>
  plot_predictions(alpha = usersrated) +
  theme(legend.title = element_text())

# Validation metrics for models at the 25-ratings threshold
targets_tracking_details(
  metrics = valid_metrics,
  details = details
) |>
  select(
    model,
    minratings,
    outcome,
    any_of(c("rmse", "mae", "mape", "rsq", "ccc"))
  ) |>
  filter(minratings == 25) |>
  select(minratings, everything()) |>
  gt::gt() |>
  gt::tab_options(quarto.disable_processing = TRUE) |>
  gtExtras::gt_theme_espn()

| minratings | model | outcome | rmse | mae | mape | rsq | ccc |
|---|---|---|---|---|---|---|---|
| 25 | glmnet | average | 0.688 | 0.506 | 7.625 | 0.277 | 0.458 |
| 25 | glmnet | averageweight | 0.479 | 0.367 | 20.356 | 0.630 | 0.773 |
| 25 | glmnet+glmnet | bayesaverage | 0.300 | 0.174 | 2.878 | 0.412 | 0.633 |
| 25 | glmnet | usersrated | 1888.348 | 468.294 | 165.111 | 0.145 | 0.379 |
# Generate predictions for upcoming games:
# first impute complexity (averageweight), then use the average and
# usersrated models to predict the Bayesian average rating
predictions <-
  upcoming_games |>
  impute_averageweight(
    model = averageweight_fit
  ) |>
  predict_bayesaverage(
    average_model = average_fit,
    usersrated_model = usersrated_fit
  )
# Display a colored table of predictions for games published 2024 or later
predictions |>
  filter(yearpublished >= 2024) |>
  # exclude game_id 388225 ("Bah Humbug"), flagged by the original author
  # as a problem listing that distorts the table — TODO confirm why
  filter(game_id != 388225) |>
  predictions_dt(games = games) |>
  add_colors()